# Helper module for this notebook; presumably re-exports openeo, xr, np, plt,
# hvplot and the openeoMap widget used below — TODO confirm its contents.
from eo_utils import *
# openEO Platform aggregator endpoint.
backend = "https://openeo.cloud"
# Authenticate via EGI Check-in (OIDC); interactive on first run, silent with a cached refresh token.
conn = openeo.connect(backend).authenticate_oidc("egi")
Authenticated using refresh token.
# Interactive map widget centred near Bolzano, Italy ([lat, lon]).
center = [46.49, 11.35]
zoom = 12
eoMap = openeoMap(center,zoom)
# Display the widget so a bounding box can be drawn on it.
eoMap.map
Get the bounding box from the previous map.
# Read the user-drawn bounding box from the map widget.
# Indexing below implies the order [west, south, east, north] — matches the print.
bbox = eoMap.getBbox()
print('west',bbox[0],'\neast',bbox[2],'\nsouth',bbox[1],'\nnorth',bbox[3])
west 11.283302 east 11.402779 south 46.455283 north 46.525958

[1] Define range of interest in time for Sentinel-1
# Sentinel-1 GRD cube: map bbox, both polarizations, a single day.
collection = 'SENTINEL1_GRD'
# bbox is [west, south, east, north]; extent is expressed in geographic WGS84.
spatial_extent = {'west':bbox[0],'east':bbox[2],'south':bbox[1],'north':bbox[3],'crs':'EPSG:4326'}
# openEO temporal extents are half-open [start, end), so this selects 2021-01-07.
temporal_extent = ["2021-01-07", "2021-01-08"]
bands = ["VV","VH"]
s1 = conn.load_collection(collection,spatial_extent=spatial_extent,bands=bands,temporal_extent=temporal_extent)
[2] Apply the CARD4L compliant SAR processing and conversion to dB:
s1bs = s1.ard_normalized_radar_backscatter(elevation_model="COPERNICUS_30").apply(lambda x: 10 * x.log(base=10))
[3] Add the save_result process as the final step; here we use NetCDF as the output format.
s1bs_netcdf = s1bs.save_result(format="NetCDF")
[4] Now we will create a batch job and start it. More info about batch jobs can be found here:
https://openeo.org/documentation/1.0/glossary.html#data-processing-modes
https://open-eo.github.io/openeo-python-client/batch_jobs.html
# Create the batch job on the back-end and start it.
# NOTE: send_job() is deprecated in the openeo Python client; create_job() is
# the equivalent call (and is what the SNAP section of this notebook already uses).
job_s1 = s1bs_netcdf.create_job(title="S1_backscatter_SH")
job_id_s1 = job_s1.job_id
if job_id_s1:
    print("Batch job created with id: ", job_id_s1)
    job_s1.start_job()
else:
    print("Error! Job ID is None")
Batch job created with id: vito-3fc7a6fd-627d-4cce-96cb-2cd6b73f3f49
If our area of interest is small, we can also do a direct request, but this will not return the CARD4L json metadata.
Note that this step automatically adds the save_result process at the end based on the output format we choose.
%time s1bs.download("./data/US3/card4l_out.nc",format="NetCDF")
Wall time: 3min 32s
We can get a description of the job and check its status.
# Re-attach to the batch job by id and report its current status.
job_s1 = conn.job(job_id_s1)
job_description = job_s1.describe_job()
print("Batch job with id: ",job_id_s1, ' is ',job_description['status'])
Batch job with id: vito-3bbc3638-9ef0-4981-ab3a-6e6dc5dba912 is finished
# Fetch the result assets (imagery + STAC metadata) of the finished job.
results = job_s1.get_results()
results
CARD4L results contain STAC metadata and the requested image.
We can simply download everything, for inspection. Please note: this will download also the original S1 GRD files, which can take a lot of time for a big area or a timeseries.
# results.download_files()
The downloaded data can be opened, but these are fairly large files, making visualization more difficult.
The foreseen way of interacting with it is to use openEO to further process the dataset into a more manageable result.
# NOTE(review): xr.open_rasterio is deprecated in recent xarray releases;
# rioxarray.open_rasterio is the maintained replacement — confirm before re-enabling.
# sar = xr.open_rasterio('s1_rtc_02F8D2_N46E011_2021_01_02_MULTIBAND.tif')
# sar
<xarray.DataArray (band: 4, y: 5000, x: 5000)>
[100000000 values with dtype=float32]
Coordinates:
* band (band) int64 1 2 3 4
* y (y) float64 47.0 47.0 47.0 47.0 47.0 ... 46.0 46.0 46.0 46.0 46.0
* x (x) float64 11.0 11.0 11.0 11.0 11.0 ... 12.0 12.0 12.0 12.0 12.0
Attributes:
transform: (0.0002, 0.0, 11.0, 0.0, -0.0002, 47.0)
crs: +init=epsg:4326
res: (0.0002, 0.0002)
is_tiled: 1
nodatavals: (nan, nan, nan, nan)
scales: (1.0, 1.0, 1.0, 1.0)
offsets: (0.0, 0.0, 0.0, 0.0)
AREA_OR_POINT: Area
TIFFTAG_RESOLUTIONUNIT: 1 (unitless)
TIFFTAG_XRESOLUTION: 1
TIFFTAG_YRESOLUTION: 1[100000000 values with dtype=float32]
array([1, 2, 3, 4])
array([46.9999, 46.9997, 46.9995, ..., 46.0005, 46.0003, 46.0001])
array([11.0001, 11.0003, 11.0005, ..., 11.9995, 11.9997, 11.9999])
# Open the synchronously downloaded CARD4L NetCDF
# (variables per the repr below: VV, VH, mask, local_incidence_angle).
S1_ard = xr.open_dataset("./data/US3/card4l_out.nc")
S1_ard
<xarray.Dataset>
Dimensions: (t: 2, x: 1280, y: 1024)
Coordinates:
* t (t) datetime64[ns] 2021-01-08 2021-01-07
* x (x) float64 6.733e+05 6.733e+05 ... 6.861e+05
* y (y) float64 5.156e+06 5.156e+06 ... 5.146e+06
Data variables:
crs |S1 b''
VV (t, y, x) float32 ...
VH (t, y, x) float32 ...
mask (t, y, x) float32 ...
local_incidence_angle (t, y, x) float32 ...
Attributes:
Conventions: CF-1.8
institution: openEO platformarray(['2021-01-08T00:00:00.000000000', '2021-01-07T00:00:00.000000000'],
dtype='datetime64[ns]')array([673285., 673295., 673305., ..., 686055., 686065., 686075.])
array([5156355., 5156345., 5156335., ..., 5146145., 5146135., 5146125.])
array(b'', dtype='|S1')
[2621440 values with dtype=float32]
[2621440 values with dtype=float32]
[2621440 values with dtype=float32]
[2621440 values with dtype=float32]
# Side-by-side grayscale previews of the first acquisition, clipped to +/-30 dB.
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(20,20))
for axis, band, label in ((ax1, S1_ard.VV, 'VV gamma0'), (ax2, S1_ard.VH, 'VH gamma0')):
    axis.imshow(band[0].values, cmap='Greys_r', vmin=-30, vmax=30)
    axis.set_title(label)
Text(0.5, 1.0, 'VH gamma0')
# Re-load the same Sentinel-1 cube (same bbox/day/bands as the ARD section)
# to compare plain backscatter coefficients against the CARD4L result.
collection = 'SENTINEL1_GRD'
spatial_extent = {'west':bbox[0],'east':bbox[2],'south':bbox[1],'north':bbox[3],'crs':'EPSG:4326'}
temporal_extent = ["2021-01-07", "2021-01-08"]
bands = ["VV","VH"]
s1 = conn.load_collection(collection,spatial_extent=spatial_extent,bands=bands,temporal_extent=temporal_extent)
# sigma0 over the ellipsoid (no terrain normalization), converted to dB.
s1_sigma = s1.sar_backscatter(coefficient='sigma0-ellipsoid').apply(lambda x: 10 * x.log(base=10))
%time s1_sigma.download("./data/US3/s1_sigma.nc",format="NetCDF")
Wall time: 2min 46s
# gamma0 over the ellipsoid, converted to dB, downloaded synchronously.
s1_gamma = s1.sar_backscatter(coefficient='gamma0-ellipsoid').apply(lambda x: 10 * x.log(base=10))
%time s1_gamma.download("./data/US3/s1_gamma.nc",format="NetCDF")
Wall time: 2min 47s
# Open both downloaded datasets for visual comparison.
s1_sigma0 = xr.open_dataset('./data/US3/s1_sigma.nc')
s1_gamma0 = xr.open_dataset('./data/US3/s1_gamma.nc')
# Sigma0 previews (second timestamp), grayscale, clipped to +/-30 dB.
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(20,20))
for axis, band, label in ((ax1, s1_sigma0.VV, 'Sigma VV'), (ax2, s1_sigma0.VH, 'Sigma VH')):
    axis.imshow(band[1].values, cmap='Greys_r', vmin=-30, vmax=30)
    axis.set_title(label)
Text(0.5, 1.0, 'Sigma VH')
# Gamma0 previews (second timestamp), grayscale, clipped to +/-30 dB.
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(20,20))
for axis, band, label in ((ax1, s1_gamma0.VV, 'Gamma VV'), (ax2, s1_gamma0.VH, 'Gamma VH')):
    axis.imshow(band[1].values, cmap='Greys_r', vmin=-30, vmax=30)
    axis.set_title(label)
Text(0.5, 1.0, 'Gamma VH')
# Pixel-wise ratio of the two backscatter conventions (first timestamp).
# NOTE(review): VV/VH were converted to dB upstream, so this divides log-scaled
# values (a quotient of logarithms), not linear powers; a dB *difference* would
# be the usual comparison — confirm this is the intended plot.
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(20,20))
ax1.imshow(s1_sigma0.VV[0].values/s1_gamma0.VV[0].values,cmap='Greys_r',vmin=0,vmax=1)
ax1.set_title('Sigma/Gamma VV')
ax2.imshow(s1_sigma0.VH[0].values/s1_gamma0.VH[0].values,cmap='Greys_r',vmin=0,vmax=1)
ax2.set_title('Sigma/Gamma VH')
Text(0.5, 1.0, 'Sigma/Gamma VH')
# Fresh connection for the time-series comparison section.
backend = "https://openeo.cloud"
conn = openeo.connect(backend).authenticate_oidc("egi")
Authenticated using refresh token.
# 320 m x 320 m square (32 pixels at 10 m) defined in UTM coordinates.
# NOTE: this rebinds `bbox` from the earlier [west, south, east, north] list
# to a dict keyed by side names, with crs 32631 (UTM zone 31N).
W, S = 510000, 5680000
bbox = {
"west": W, "east": W + 32 * 10,
"south": S, "north": S + 32 * 10,
"crs": 32631
}
# Growing-season 2020 time range for the comparison.
dates = ("2020-05-06T00:00:00", "2020-09-30T00:00:00")
def backscatter(connection, spatial_extent=None, temporal_extent=None):
    """Build a Sentinel-1 sigma0-ellipsoid backscatter cube in dB.

    Parameters
    ----------
    connection : openeo.Connection
        Back-end connection used to load the collection.
    spatial_extent : dict, optional
        Bounding box dict ('west'/'east'/'south'/'north'/'crs').
        Defaults to the module-level ``bbox`` (previous behaviour).
    temporal_extent : tuple, optional
        (start, end) timestamp pair. Defaults to the module-level ``dates``.

    Returns
    -------
    openeo.DataCube
        Lazy process graph; nothing is executed until download/job submission.
    """
    # Fall back to the notebook-level globals so existing calls are unchanged.
    if spatial_extent is None:
        spatial_extent = bbox
    if temporal_extent is None:
        temporal_extent = dates
    cube = connection.load_collection(
        "SENTINEL1_GRD",
        spatial_extent=spatial_extent,
        temporal_extent=temporal_extent,
        bands=["VV", "VH"],
    )
    # sigma0 over the ellipsoid, then linear -> dB (10 * log10).
    return (cube.sar_backscatter(coefficient="sigma0-ellipsoid")
            .apply(lambda x: 10 * x.log(base=10)))
%%time
# Synchronous execution of the backscatter graph; small area, so fast.
backscatter(conn).download("./data/US3/shub-series.nc", format="NetCDF")
CPU times: user 32 ms, sys: 12 ms, total: 44 ms Wall time: 15.1 s
%%time
# Pre-computed SNAP-based sigma0 collections, split by orbit direction.
asc = (conn.load_collection("S1_GRD_SIGMA0_ASCENDING").filter_bbox(**bbox)
.filter_temporal(dates)
.filter_bands(["VH", "VV"]))
desc = (conn.load_collection("S1_GRD_SIGMA0_DESCENDING").filter_bbox(**bbox)
.filter_temporal(dates)
.filter_bands(["VH", "VV"]))
# Merge the two orbit directions (max where both overlap), convert to dB, download.
desc.merge_cubes(asc,overlap_resolver="max").apply(lambda x: 10 * x.log(base=10)).download("./data/US3/snap-series.nc", format="NetCDF")
CPU times: user 20 ms, sys: 8 ms, total: 28 ms Wall time: 14 s
# Load both downloaded time series for comparison.
shub_ts = xr.open_dataset("./data/US3/shub-series.nc")
snap_ts = xr.open_dataset("./data/US3/snap-series.nc")
# Rename bands so both sources coexist in one dataset, spatially average
# each timestamp, and plot the four mean time series interactively.
xr.merge([shub_ts.rename({"VV": "VV_SHUB", "VH":"VH_SHUB"}), snap_ts.rename({"VV": "VV_SNAP", "VH":"VH_SNAP"})]).mean(dim=['x','y']).hvplot(width=1000,height=600)

Connect to openEO Platform and authenticate using EGI
# Fresh connection for the pre-computed-data section (explicit provider kwarg).
backend = "https://openeo.cloud"
conn = openeo.connect(backend).authenticate_oidc(provider_id='egi')
Authenticated using refresh token.
Explore the pre-computed data and find the available dates over our area of interest:
# Iterate the STAC items of the pre-computed gamma0 collection for one week.
# NOTE(review): `bbox` here is still the UTM dict from the previous section —
# confirm collection_items accepts that form/CRS for spatial_extent.
itemIter = conn.collection_items(
"gamma0_sentinel_1_dv",
temporal_extent = ["2017-03-14", "2017-03-21"],
spatial_extent = bbox
)
# Show the first matching item.
next(itemIter)
[1] Define range of interest in time for Sentinel-1
collection = 's1a_csar_grdh_iw'
# BUG FIX: at this point in the notebook `bbox` is a dict keyed by
# 'west'/'east'/'south'/'north'/'crs' (redefined in the time-series section),
# so positional indexing like bbox[0] would raise a KeyError. Build the
# extent from the dict keys and carry its CRS instead of hard-coding EPSG:4326.
spatial_extent = {'west': bbox['west'], 'east': bbox['east'],
                  'south': bbox['south'], 'north': bbox['north'],
                  'crs': bbox.get('crs', 'EPSG:4326')}
# Single day chosen from the STAC item listing above.
temporal_extent = ["2017-03-19", "2017-03-19"]
bands = ["VV+VH"]
s1 = conn.load_collection(collection, spatial_extent=spatial_extent, bands=bands, temporal_extent=temporal_extent)
[2] Apply the CARD4L compliant SAR processing.
# CARD4L-compliant ARD processing with the Copernicus 30 m DEM.
s1bs = s1.ard_normalized_radar_backscatter(elevation_model="COPERNICUS_30")
# Alternative: plain (non-ARD) backscatter.
# s1bs = s1.sar_backscatter()
[3] Add the save_result process as the final step; here we use GeoTIFF as the output format.
s1bs_tif = s1bs.save_result(format="GTiff")
[4] Now we will create a batch job and start it.
# Create the batch job from the saved process graph, then start it.
job_snap = conn.create_job(s1bs_tif)
job_id_snap = job_snap.job_id
if not job_id_snap:
    # Guard: without an id the job was not registered on the back-end.
    print("Error! Job ID is None")
else:
    print("Batch job created with id: ", job_id_snap)
    job_snap.start_job()
# BUG FIX: `eodc_conn` is never defined in this notebook (NameError at runtime);
# the active connection is `conn`, matching how the first batch job is polled.
job_snap = conn.job(job_id_snap)
job_description = job_snap.describe_job()
print("Batch job with id: ",job_id_snap, ' is ',job_description['status'])
# Fetch the job results and download the single GeoTIFF asset.
results = job_snap.get_results()
results
results.download_file("./data/US3/card4l_snap.tif")
# NOTE(review): opening a GeoTIFF with xr.open_dataset relies on a
# rasterio-backed engine (e.g. rioxarray installed) — confirm the environment.
S1_ard_snap = xr.open_dataset("./data/US3/card4l_snap.tif")
S1_ard_snap
<xarray.Dataset>
Dimensions: (t: 2, x: 918, y: 732)
Coordinates:
* t (t) datetime64[ns] 2021-01-07 2021-01-08
* x (x) float64 6.732e+05 6.732e+05 ... 6.823e+05
* y (y) float64 5.148e+06 5.148e+06 ... 5.156e+06
Data variables:
VH (t, y, x) float32 ...
VV (t, y, x) float32 ...
local_incidence_angle (t, y, x) float32 ...
mask (t, y, x) float32 ...
Attributes:
nodata: 0.0
crs: +proj=utm +zone=32 +datum=WGS84 +units=m +no_defs array(['2021-01-07T00:00:00.000000000', '2021-01-08T00:00:00.000000000'],
dtype='datetime64[ns]')array([673175., 673185., 673195., ..., 682325., 682335., 682345.])
array([5148225., 5148235., 5148245., ..., 5155515., 5155525., 5155535.])
[1343952 values with dtype=float32]
[1343952 values with dtype=float32]
[1343952 values with dtype=float32]
[1343952 values with dtype=float32]
# The SNAP result is not yet in dB (no log applied upstream in this section),
# so convert with 10*log10 here; flipud matches the ascending-y orientation
# of the file to the display. Clipped to +/-30 dB.
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(20,20))
for axis, band, label in ((ax1, S1_ard_snap.VV, 'VV gamma0 SNAP'), (ax2, S1_ard_snap.VH, 'VH gamma0 SNAP')):
    axis.imshow(np.flipud(10 * np.log10(band[1].values)), cmap='Greys_r', vmin=-30, vmax=30)
    axis.set_title(label)
Text(0.5, 1.0, 'VH gamma0 SNAP')